Description of the procedures and analysis present in Complementary Analysis 1, Simple multinomial predictive models and diagnostic discrimination, in the Doctorate Thesis presented to the Programa de Pós-Graduação em Ciências Médicas at the Instituto D'Or de Pesquisa e Ensino as a partial requirement to obtain the Doctorate Degree.
Part of the data used here cannot be shared due to restrictions of the Ethics Committee. Data can be shared upon reasonable request to the corresponding author. To work within this limitation, we will generate random data to simulate the results.
Get in touch with us (fernandahmoraes@gmail.com) in case any help is needed; our aim is to improve the code as needed!
# Working directory for this complementary analysis.
# NOTE(review): setwd() makes the script machine-specific; prefer running from
# the project root (or here::here()) when portability matters.
setwd("~/GitHub/FHPdeMoraes_thesis/Complementaryanalysis2_predictivemodel")
# Data import / wrangling / reporting
library(readr)
library(tidyverse)
library(lubridate)
library(ggpubr)
library(kableExtra)
library(broom)
library(MASS)
library(rmarkdown)
# Multinomial predictive model packages.
# library() (not require()) so a missing dependency stops the script instead
# of silently returning FALSE and failing later.
library(foreign)
library(nnet)
library(reshape2)
library(stargazer)
library(betareg)
library(caret)
library(pROC)
# Two-sided t-test of H0: coefficient == val for a fitted regression.
#
# reg     - fitted model whose coef(summary(reg)) table has columns
#           (Estimate, Std. Error, ...) and which carries df.residual
#           (e.g. stats::lm or MASS::rlm).
# coefnum - row index of the coefficient in the summary table.
# val     - null-hypothesis value for that coefficient.
# Returns the two-sided p-value from the t distribution on reg$df.residual
# degrees of freedom.
test_coef <- function(reg, coefnum, val) {
  co <- coef(summary(reg))
  tstat <- (co[coefnum, 1] - val) / co[coefnum, 2]
  2 * pt(abs(tstat), reg$df.residual, lower.tail = FALSE)
}
# Load raw morphometric data (per subject x ROI).
dados_raw <- read_csv("dados_raw.csv")
# Estimate cortical folding variables. localGI is the gyrification index;
# k/K are the folding offset; S and I are the remaining log-space components.
# For lobe-level ROIs the areas are rescaled by c = 4*pi/GaussianCurvature;
# whole hemispheres need no correction.
dados_raw <- dados_raw %>%
  mutate(
    localGI = TotalArea / ExposedArea,
    k = sqrt(AvgThickness) * TotalArea / (ExposedArea ^ 1.25),
    K = 1 / 4 * log10(AvgThickness^2) + log10(TotalArea) - 5 / 4 * log10(ExposedArea),
    S = 3 / 2 * log10(TotalArea) + 3 / 4 * log10(ExposedArea) - 9 / 4 * log10(AvgThickness^2),
    I = log10(TotalArea) + log10(ExposedArea) + log10(AvgThickness^2),
    c = as.double(ifelse(ROI == "hemisphere", NA, 4 * pi / GaussianCurvature)),
    TotalArea_corrected = ifelse(ROI == "hemisphere", TotalArea, TotalArea * c),
    ExposedArea_corrected = ifelse(ROI == "hemisphere", ExposedArea, ExposedArea * c),
    logTotalArea_corrected = log10(TotalArea_corrected),
    logExposedArea_corrected = log10(ExposedArea_corrected),
    localGI_corrected = ifelse(
      ROI == "hemisphere",
      TotalArea / ExposedArea,
      TotalArea_corrected / ExposedArea_corrected
    ),
    k_corrected = ifelse(
      ROI == "hemisphere",
      sqrt(AvgThickness) * log10(TotalArea) / (log10(ExposedArea) ^ 1.25),
      # FIX: power applies to the log, log10(x)^1.25, mirroring the hemisphere
      # branch above (was log10(x^1.25), i.e. 1.25*log10(x)).
      sqrt(AvgThickness) * log10(TotalArea_corrected) / (log10(ExposedArea_corrected) ^ 1.25)
    ),
    K_corrected = ifelse(
      ROI == "hemisphere",
      1 / 4 * log10(AvgThickness^2) + log10(TotalArea) - 5 / 4 * log10(ExposedArea),
      1 / 4 * log10(AvgThickness^2) + log10(TotalArea_corrected) - 5 / 4 * log10(ExposedArea_corrected)
    ),
    I_corrected = ifelse(
      ROI == "hemisphere",
      log10(TotalArea) + log10(ExposedArea) + log10(AvgThickness^2),
      log10(TotalArea_corrected) + log10(ExposedArea_corrected) + log10(AvgThickness^2)
    ),
    S_corrected = ifelse(
      ROI == "hemisphere",
      3 / 2 * log10(TotalArea) + 3 / 4 * log10(ExposedArea) - 9 / 4 * log10(AvgThickness^2),
      3 / 2 * log10(TotalArea_corrected) + 3 / 4 * log10(ExposedArea_corrected) - 9 / 4 * log10(AvgThickness^2)
    )
  )
# create age intervals
dados_raw$Age_interval <- cut(dados_raw$Age,
breaks = c(0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100),
right = FALSE,
include.lowest = TRUE)
dados_raw$Age_interval10 <- cut(dados_raw$Age,
breaks = c(0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100),
right = FALSE,
include.lowest = TRUE)
dados_all <- dados_raw %>% filter(
Diagnostic == "CONTROLE" |
Diagnostic == "CCL" |
Diagnostic == "ALZ", !is.na(logAvgThickness), ExposedArea != 0 | !is.na(localGI), !is.infinite(logExposedArea)) %>%
droplevels()
dados <- dados_all
# rename diagnostics
dados$Diagnostic[dados$Diagnostic == "CONTROLE"] <- "CTL"
dados$Diagnostic[dados$Diagnostic == "ALZ"] <- "AD"
dados$Diagnostic[dados$Diagnostic == "CCL"] <- "MCI"
dados$Diagnostic <- factor(dados$Diagnostic, levels = c("AD", "MCI","CTL"))
# filter data
dados <- dados %>%
filter(machine == "Philips-Achieva", # include only subjects acquired at Philips Achieva 3T
ESC == 8 | ESC > 8, # include only subjects with 8 years of scholarship or more
Session == 1) %>% # use only data from Session 1
droplevels() # delete factor levels
# define age for deaging
# All "de-aged" metrics below are extrapolated back to this reference age (years).
Age.cor = 25
## Avg thickness ----
# Per-ROI robust linear slope (MASS::rlm) of AvgThickness vs Age, fitted on
# controls only; the Age coefficient is kept as the decay rate c_AvgThickness.
decay_AvgThickness <-
filter(dados, Diagnostic == "CTL") %>%
droplevels() %>%
group_by(ROI) %>%
do(fit_decay_AvgThickness = tidy(rlm(AvgThickness ~ Age, data = .), conf.int=TRUE)) %>% unnest(cols = c(fit_decay_AvgThickness)) %>%
filter(term == "Age") %>%
mutate(c_AvgThickness = estimate,
std_error_c_AvgThickness = std.error) %>%
dplyr::select(c(ROI, c_AvgThickness, std_error_c_AvgThickness))
## TotalArea ----
# Same control-only per-ROI rlm slope, for TotalArea (finite values only).
decay_TotalArea <- filter(dados, Diagnostic == "CTL", !is.na(TotalArea), !is.nan(TotalArea), !is.infinite(TotalArea)) %>%
droplevels() %>%
group_by(ROI) %>%
do(fit_decay_TotalArea = tidy(rlm(TotalArea ~ Age, data = .),conf.int=TRUE)) %>%
unnest(cols = c(fit_decay_TotalArea)) %>%
filter(term == "Age") %>%
mutate(c_TotalArea = estimate,
std_error_c_TotalArea = std.error) %>%
dplyr::select(c(ROI, c_TotalArea, std_error_c_TotalArea))
## ExposedArea ----
decay_ExposedArea <- filter(dados, Diagnostic == "CTL", !is.na(ExposedArea), !is.nan(ExposedArea), !is.infinite(ExposedArea)) %>%
droplevels() %>%
group_by(ROI) %>%
do(fit_decay_ExposedArea = tidy(rlm(ExposedArea ~ Age, data = .), conf.int = TRUE)) %>%
unnest(cols = c(fit_decay_ExposedArea)) %>%
filter(term == "Age") %>%
mutate(c_ExposedArea = estimate,
std_error_c_ExposedArea = std.error) %>%
dplyr::select(c(ROI, c_ExposedArea, std_error_c_ExposedArea))
# Attach the per-ROI control decay rates to every row, then build "de-aged"
# variables: each raw metric is linearly extrapolated back to Age.cor (25 y)
# using the control-only slope, and the folding components K/I/S are
# recomputed from the de-aged metrics.
# NOTE(review): full_join here matches on ROI alone (natural join); this
# relies on each decay table having exactly one row per ROI — confirm no
# duplicate ROI keys, which would fan out rows.
dados <- full_join(dados, decay_AvgThickness) %>%
full_join(decay_TotalArea) %>%
full_join(decay_ExposedArea) %>%
mutate(
AvgThickness_age_decay = AvgThickness - c_AvgThickness * (Age - Age.cor),
logAvgThickness_age_decay = log10(AvgThickness_age_decay),
TotalArea_age_decay = TotalArea - c_TotalArea * (Age - Age.cor),
logTotalArea_age_decay = log10(TotalArea_age_decay),
ExposedArea_age_decay = ExposedArea - c_ExposedArea * (Age - Age.cor),
logExposedArea_age_decay = log10(ExposedArea_age_decay),
K_age_decay = log10(TotalArea_age_decay) + 1/4*log10(AvgThickness_age_decay^2) - 5/4*log10(ExposedArea_age_decay),
I_age_decay = log10(TotalArea_age_decay) + log10(ExposedArea_age_decay) + log10(AvgThickness_age_decay^2),
S_age_decay = 3/2*log10(TotalArea_age_decay) + 3/4*log10(ExposedArea_age_decay) - 9/4*log10(AvgThickness_age_decay^2))
# Ensure the log variables are plain doubles.
dados$logAvgThickness <- as.double(dados$logAvgThickness)
dados$logExposedArea<- as.double(dados$logExposedArea)
dados$logTotalArea <- as.double(dados$logTotalArea)
# Subset with the four lobes plus whole hemispheres.
dados_v1 <- filter(dados, ROI %in% c("F", "T", "O", "P", "hemisphere")) %>%
  droplevels()
# lobe data (two subjects excluded; rows must have a valid de-aged K)
dados_lobos_v1 <- unique(filter(dados, ROI %in% c("F", "T", "O", "P"), !is.na(K_age_decay), SUBJ != "SUBJ211", SUBJ != "SUBJ223")) %>%
  droplevels()
# hemisphere data
dados_hemi_v1 <- unique(filter(dados, ROI == "hemisphere", !is.na(K_age_decay)))
## # A tibble: 3 × 12
## Diagnostic N age age_range ESC T AT AE k K S
## <fct> <int> <chr> <chr> <chr> <chr> <chr> <chr> <chr> <chr> <chr>
## 1 AD 13 77 ± 6.1 63 ; 86 13 ± 3 2.4 … 9500… 3700… 0.28… -0.5… 9.2 …
## 2 MCI 33 72 ± 4.6 62 ; 82 13 ± … 2.5 … 9700… 3700… 0.29… -0.5… 9.2 …
## 3 CTL 77 66 ± 8.4 43 ; 80 15 ± … 2.5 … 9800… 3700… 0.3 … -0.5… 9.1 …
## # … with 1 more variable: I <chr>
## Analysis of Variance Table
##
## Response: Age
## Df Sum Sq Mean Sq F value Pr(>F)
## Diagnostic 2 3891.9 1945.97 35.9 2.188e-14 ***
## Residuals 243 13172.0 54.21
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = Age ~ Diagnostic, data = dados_hemi_v1)
##
## $Diagnostic
## diff lwr upr p adj
## MCI-AD -4.834646 -8.854706 -0.8145856 0.0136819
## CTL-AD -11.214709 -14.895884 -7.5335333 0.0000000
## CTL-MCI -6.380063 -8.934389 -3.8257371 0.0000000
## Analysis of Variance Table
##
## Response: ESC
## Df Sum Sq Mean Sq F value Pr(>F)
## Diagnostic 2 284.49 142.247 25.825 6.767e-11 ***
## Residuals 243 1338.47 5.508
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## Tukey multiple comparisons of means
## 95% family-wise confidence level
##
## Fit: aov(formula = ESC ~ Diagnostic, data = dados_hemi_v1)
##
## $Diagnostic
## diff lwr upr p adj
## MCI-AD 0.5664336 -0.7150413 1.847908 0.5509076
## CTL-AD 2.6053946 1.4319461 3.778843 0.0000011
## CTL-MCI 2.0389610 1.2247184 2.853204 0.0000000
## [1] "N sujeitos = 123"
## [1] "N sujeitos lobos = 123"
## # A tibble: 0 × 1
## # … with 1 variable: SUBJ <chr>
# Age by diagnostic group (boxplot + ANOVA p-value).
# FIX: alpha is a fixed aesthetic, so it belongs in the geom call; inside
# aes() it is mapped as a constant and produces a spurious "0.4" legend.
ggplot(data = dados_hemi_v1, aes(x = Diagnostic, y = Age, color = Diagnostic, fill = Diagnostic)) +
  geom_boxplot(alpha = 0.4) +
  theme_pubr() +
  stat_compare_means(method = "anova")
# K by diagnostic group, split by gender.
ggplot(data = dados_hemi_v1, aes(x = Diagnostic, y = K, color = Gender, fill = Gender)) +
  geom_boxplot(alpha = 0.4) +
  theme_pubr() +
  stat_compare_means(method = "anova")
# After relevel the factor order is CTL, AD, MCI, so N_diag rows are
# row 1 = CTL, row 2 = AD, row 3 = MCI (matches the indexing below).
dados_hemi_v1$Diagnostic <- relevel(dados_hemi_v1$Diagnostic, "CTL")
set.seed(0)
# Unique subjects per diagnostic group.
N_diag <- dados_hemi_v1 %>% dplyr::select(SUBJ, Diagnostic) %>% unique() %>% group_by(Diagnostic) %>% summarise(n_DIAG = n_distinct(SUBJ))
dados_hemi_v1_filter <- dados_hemi_v1 %>% dplyr::select(SUBJ, Diagnostic) %>% unique()
# 80% of each group, stratified by diagnosis.
N_CTL <- as.double(floor(N_diag[1,2]*0.8))
N_CCL <- as.double(round(N_diag[3,2]*0.8))
N_ALZ <- as.double(round(N_diag[2,2]*0.8))
# NOTE(review): despite the names, `test.samples` holds the 80% stratified
# draw and `subj.training` the remaining 20% of subjects; the anti_join /
# semi_join below therefore make train.data the 80% and test.data the 20%.
# Net effect is a correct subject-level split, but the names are inverted.
test.samples <- c(sample(which(dados_hemi_v1_filter$Diagnostic == "AD"), N_ALZ), sample(which(dados_hemi_v1_filter$Diagnostic == "CTL"), N_CTL), sample(which(dados_hemi_v1_filter$Diagnostic == "MCI"), N_CCL))
subj.training <- as_tibble(dados_hemi_v1_filter[-test.samples, ]$SUBJ)
colnames(subj.training) <- c("SUBJ")
train.data <- anti_join(dados_hemi_v1, subj.training)
test.data <- semi_join(dados_hemi_v1, subj.training)
# Feature distributions by diagnostic group (full hemisphere dataset).
caret::featurePlot(x = dados_hemi_v1[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = dados_hemi_v1$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
caret::featurePlot(x = dados_hemi_v1[, c("K", "I", "S")], y = dados_hemi_v1$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(3,1))
caret::featurePlot(x = dados_hemi_v1[, c("K_age_decay", "I_age_decay", "S_age_decay")], y = dados_hemi_v1$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(3,1))
# Sanity check: subject counts overall and per split (97 + 26 = 123).
print(n_distinct(dados_hemi_v1$SUBJ))
## [1] 123
print(n_distinct(train.data$SUBJ))
## [1] 97
print(n_distinct(test.data$SUBJ))
## [1] 26
# Same feature plots restricted to the training split.
caret::featurePlot(x = train.data[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = train.data$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
caret::featurePlot(x = train.data[, c("K", "K_age_decay", "I", "I_age_decay", "S", "S_age_decay")], y = train.data$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(3,2))
# Model 1: multinomial logit Diagnostic ~ K + Age (reference class: CTL).
m.multi.nova1 <-
multinom(Diagnostic ~ K + Age, data = train.data)
## # weights: 12 (6 variable)
## initial value 213.130784
## iter 10 value 142.733086
## iter 20 value 141.522216
## iter 30 value 135.869701
## iter 40 value 135.204635
## iter 50 value 135.109494
## iter 60 value 135.004106
## iter 70 value 134.949990
## iter 80 value 134.935244
## iter 90 value 134.928278
## iter 100 value 134.925355
## final value 134.925355
## stopped after 100 iterations
# Coefficient/SE table for model 1 (log-odds of AD and MCI vs CTL).
stargazer(m.multi.nova1, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K -73.439*** -14.440
## (3.324) (11.464)
##
## Age 0.258*** 0.118***
## (0.067) (0.032)
##
## Constant -59.859*** -16.673***
## (3.971) (6.078)
##
## ----------------------------------------------
## Akaike Inf. Crit. 281.851 281.851
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 1 coefficients.
z1 <-
summary(m.multi.nova1)$coefficients / summary(m.multi.nova1)$standard.errors
p1 <- (1 - pnorm(abs(z1), 0, 1)) * 2
t(p1)
## AD MCI
## (Intercept) 0.0000000000 0.0060857027
## K 0.0000000000 0.2077896075
## Age 0.0001078354 0.0001855142
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi1 = exp(coef(m.multi.nova1))
t(coef.multi1)
## AD MCI
## (Intercept) 1.008650e-26 5.741146e-08
## K 1.275907e-32 5.353059e-07
## Age 1.294764e+00 1.125566e+00
# Predictions on the held-out test set.
predicted.classes.multi.nova1 <-
m.multi.nova1 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova1 == test.data$Diagnostic)
## [1] 0.7307692
# Summary
confusionMatrix(predicted.classes.multi.nova1, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 30 2 8
## AD 0 3 1
## MCI 2 1 5
##
## Overall Statistics
##
## Accuracy : 0.7308
## 95% CI : (0.5898, 0.8443)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.05604
##
## Kappa : 0.4348
##
## Mcnemar's Test P-Value : 0.13278
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.9375 0.50000 0.35714
## Specificity 0.5000 0.97826 0.92105
## Pos Pred Value 0.7500 0.75000 0.62500
## Neg Pred Value 0.8333 0.93750 0.79545
## Prevalence 0.6154 0.11538 0.26923
## Detection Rate 0.5769 0.05769 0.09615
## Detection Prevalence 0.7692 0.07692 0.15385
## Balanced Accuracy 0.7188 0.73913 0.63910
# ROC: multiclass AUC (Hand & Till) for model 1.
# NOTE(review): this uses hard predicted classes rather than
# predict(type = "probs"); class-probability input would give a more
# informative AUC — confirm intent.
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova1),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = FALSE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova1), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = FALSE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova1) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.6677
# Model 2: multinomial logit Diagnostic ~ logAvgThickness + Age.
m.multi.nova2 <-
multinom(Diagnostic ~ logAvgThickness + Age, data = train.data)
## # weights: 12 (6 variable)
## initial value 213.130784
## iter 10 value 139.961542
## iter 20 value 139.672869
## final value 137.784560
## converged
# Coefficient/SE table for model 2.
stargazer(m.multi.nova2, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## logAvgThickness -48.394*** -22.859**
## (3.286) (11.304)
##
## Age 0.304*** 0.102***
## (0.066) (0.032)
##
## Constant -5.225 1.039
## (4.483) (5.520)
##
## ----------------------------------------------
## Akaike Inf. Crit. 287.569 287.569
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 2 coefficients.
z2 <-
summary(m.multi.nova2)$coefficients / summary(m.multi.nova2)$standard.errors
p2 <- (1 - pnorm(abs(z2), 0, 1)) * 2
t(p2)
## AD MCI
## (Intercept) 2.437921e-01 0.850659005
## logAvgThickness 0.000000e+00 0.043151575
## Age 4.609333e-06 0.001588328
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi2 = exp(coef(m.multi.nova2))
t(coef.multi2)
## AD MCI
## (Intercept) 5.380384e-03 2.827298e+00
## logAvgThickness 9.606719e-22 1.181233e-10
## Age 1.355814e+00 1.107784e+00
# Predictions on the held-out test set.
predicted.classes.multi.nova2 <-
m.multi.nova2 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova2 == test.data$Diagnostic)
## [1] 0.6730769
# Summary
confusionMatrix(predicted.classes.multi.nova2, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 30 2 8
## AD 0 3 4
## MCI 2 1 2
##
## Overall Statistics
##
## Accuracy : 0.6731
## 95% CI : (0.5289, 0.7967)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.23993
##
## Kappa : 0.3262
##
## Mcnemar's Test P-Value : 0.06018
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.9375 0.50000 0.14286
## Specificity 0.5000 0.91304 0.92105
## Pos Pred Value 0.7500 0.42857 0.40000
## Neg Pred Value 0.8333 0.93333 0.74468
## Prevalence 0.6154 0.11538 0.26923
## Detection Rate 0.5769 0.05769 0.03846
## Detection Prevalence 0.7692 0.13462 0.09615
## Balanced Accuracy 0.7188 0.70652 0.53195
# ROC: multiclass AUC (Hand & Till) for model 2, from hard class labels
# (NOTE(review): predict(type = "probs") would be more informative).
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova2),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = TRUE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova2), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova2) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.6892
# Model 0_2: adds schooling (ESC) to the thickness model.
m.multi.nova0_2 <-
multinom(Diagnostic ~ logAvgThickness + Age + ESC, data = train.data)
## # weights: 15 (8 variable)
## initial value 213.130784
## iter 10 value 132.749238
## iter 20 value 130.774868
## iter 30 value 129.107343
## iter 40 value 127.724807
## iter 50 value 127.590914
## iter 60 value 127.569589
## iter 70 value 127.554830
## iter 80 value 127.547652
## iter 90 value 127.539117
## iter 100 value 127.535290
## final value 127.535290
## stopped after 100 iterations
# Coefficient/SE table for model 0_2.
stargazer(m.multi.nova0_2, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## logAvgThickness -67.690*** -17.561
## (2.917) (11.642)
##
## Age 0.223*** 0.082**
## (0.065) (0.032)
##
## ESC -0.510*** -0.256***
## (0.127) (0.082)
##
## Constant 15.068*** 4.165
## (4.478) (5.908)
##
## ----------------------------------------------
## Akaike Inf. Crit. 271.071 271.071
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 0_2 coefficients.
z0_2 <-
summary(m.multi.nova0_2)$coefficients / summary(m.multi.nova0_2)$standard.errors
p0_2 <- (1 - pnorm(abs(z0_2), 0, 1)) * 2
t(p0_2)
## AD MCI
## (Intercept) 7.649500e-04 0.480802601
## logAvgThickness 0.000000e+00 0.131473758
## Age 6.431382e-04 0.011507258
## ESC 5.683326e-05 0.001850566
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi0_2 = exp(coef(m.multi.nova0_2))
t(coef.multi0_2)
## AD MCI
## (Intercept) 3.498281e+06 6.442058e+01
## logAvgThickness 4.006024e-30 2.363530e-08
## Age 1.249800e+00 1.085065e+00
## ESC 6.004399e-01 7.739285e-01
# Predictions on the held-out test set.
predicted.classes.multi.nova0_2 <- m.multi.nova0_2 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova0_2 == test.data$Diagnostic)
## [1] 0.6538462
# Summary
confusionMatrix(predicted.classes.multi.nova0_2, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 26 2 3
## AD 0 1 4
## MCI 6 3 7
##
## Overall Statistics
##
## Accuracy : 0.6538
## 95% CI : (0.5091, 0.7803)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.3380
##
## Kappa : 0.358
##
## Mcnemar's Test P-Value : 0.3701
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.8125 0.16667 0.5000
## Specificity 0.7500 0.91304 0.7632
## Pos Pred Value 0.8387 0.20000 0.4375
## Neg Pred Value 0.7143 0.89362 0.8056
## Prevalence 0.6154 0.11538 0.2692
## Detection Rate 0.5000 0.01923 0.1346
## Detection Prevalence 0.5962 0.09615 0.3077
## Balanced Accuracy 0.7812 0.53986 0.6316
# ROC: multiclass AUC (Hand & Till) for model 0_2, from hard class labels
# (NOTE(review): predict(type = "probs") would be more informative).
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova0_2),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = TRUE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0_2), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0_2) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.6753
# Model 0: folding variable K plus Age and schooling (ESC).
m.multi.nova0 <-
multinom(Diagnostic ~ K + Age + ESC, data = train.data)
## # weights: 15 (8 variable)
## initial value 213.130784
## iter 10 value 134.116498
## iter 20 value 131.453188
## iter 30 value 129.237736
## iter 40 value 124.493246
## iter 50 value 124.078558
## iter 60 value 123.970662
## iter 70 value 123.868622
## iter 80 value 123.801585
## iter 90 value 123.732670
## iter 100 value 123.709844
## final value 123.709844
## stopped after 100 iterations
# Coefficient/SE table for model 0.
stargazer(m.multi.nova0, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K -91.970*** -15.979
## (3.101) (11.961)
##
## Age 0.205*** 0.088***
## (0.071) (0.032)
##
## ESC -0.569*** -0.262***
## (0.135) (0.083)
##
## Constant -58.276*** -11.529*
## (4.506) (6.243)
##
## ----------------------------------------------
## Akaike Inf. Crit. 263.420 263.420
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 0 coefficients.
z0 <-
summary(m.multi.nova0)$coefficients / summary(m.multi.nova0)$standard.errors
p0 <- (1 - pnorm(abs(z0), 0, 1)) * 2
t(p0)
## AD MCI
## (Intercept) 0.000000e+00 0.064771949
## K 0.000000e+00 0.181567681
## Age 3.777383e-03 0.005543928
## ESC 2.548536e-05 0.001664737
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi0 = exp(coef(m.multi.nova0))
t(coef.multi0)
## AD MCI
## (Intercept) 4.910726e-26 9.840621e-06
## K 1.142471e-40 1.148666e-07
## Age 1.226948e+00 1.091918e+00
## ESC 5.661689e-01 7.698463e-01
# Predictions on the held-out test set.
predicted.classes.multi.nova0 <- m.multi.nova0 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova0 == test.data$Diagnostic)
## [1] 0.7115385
# Summary
confusionMatrix(predicted.classes.multi.nova0, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 27 2 2
## AD 0 1 3
## MCI 5 3 9
##
## Overall Statistics
##
## Accuracy : 0.7115
## 95% CI : (0.5692, 0.8287)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.09824
##
## Kappa : 0.4621
##
## Mcnemar's Test P-Value : 0.34964
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.8438 0.16667 0.6429
## Specificity 0.8000 0.93478 0.7895
## Pos Pred Value 0.8710 0.25000 0.5294
## Neg Pred Value 0.7619 0.89583 0.8571
## Prevalence 0.6154 0.11538 0.2692
## Detection Rate 0.5192 0.01923 0.1731
## Detection Prevalence 0.5962 0.07692 0.3269
## Balanced Accuracy 0.8219 0.55072 0.7162
# ROC: multiclass AUC (Hand & Till) for model 0, from hard class labels
# (NOTE(review): predict(type = "probs") would be more informative).
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova0),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = TRUE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.7237
# Model 0_0: all three folding components (K, I, S) plus Age.
m.multi.nova0_0 <-
multinom(Diagnostic ~ K + I + S + Age, data = train.data)
## # weights: 18 (10 variable)
## initial value 213.130784
## iter 10 value 137.550017
## iter 20 value 132.662085
## iter 30 value 130.676258
## iter 40 value 129.813038
## final value 129.791092
## converged
# Coefficient/SE table for model 0_0.
stargazer(m.multi.nova0_0, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K -80.489*** -12.737
## (2.152) (11.995)
##
## I -11.731*** -2.646
## (3.051) (1.614)
##
## S 3.328 1.939
## (3.488) (1.967)
##
## Age 0.236*** 0.107***
## (0.067) (0.033)
##
## Constant 28.518*** -5.376***
## (0.269) (1.117)
##
## ----------------------------------------------
## Akaike Inf. Crit. 279.582 279.582
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 0_0 coefficients.
z0_0 <-
(summary(m.multi.nova0_0)$coefficients)/(summary(m.multi.nova0_0)$standard.errors)
p0_0 <- (1 - pnorm(abs(z0_0), 0, 1)) * 2
t(p0_0)
## AD MCI
## (Intercept) 0.0000000000 1.489862e-06
## K 0.0000000000 2.882753e-01
## I 0.0001205562 1.011878e-01
## S 0.3399794301 3.243309e-01
## Age 0.0004607455 1.048672e-03
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi0_0 = exp(coef(m.multi.nova0_0))
t(coef.multi0_0)
## AD MCI
## (Intercept) 2.427005e+12 4.626382e-03
## K 1.107225e-35 2.938945e-06
## I 8.036782e-06 7.092752e-02
## S 2.787740e+01 6.948562e+00
## Age 1.266412e+00 1.113474e+00
# Predictions on the held-out test set.
predicted.classes.multi.nova0_0 <- m.multi.nova0_0 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova0_0 == test.data$Diagnostic)
## [1] 0.7115385
# Summary
confusionMatrix(predicted.classes.multi.nova0_0, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 30 2 8
## AD 0 3 2
## MCI 2 1 4
##
## Overall Statistics
##
## Accuracy : 0.7115
## 95% CI : (0.5692, 0.8287)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.09824
##
## Kappa : 0.3981
##
## Mcnemar's Test P-Value : 0.11490
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.9375 0.50000 0.28571
## Specificity 0.5000 0.95652 0.92105
## Pos Pred Value 0.7500 0.60000 0.57143
## Neg Pred Value 0.8333 0.93617 0.77778
## Prevalence 0.6154 0.11538 0.26923
## Detection Rate 0.5769 0.05769 0.07692
## Detection Prevalence 0.7692 0.09615 0.13462
## Balanced Accuracy 0.7188 0.72826 0.60338
# ROC: multiclass AUC (Hand & Till) for model 0_0, from hard class labels
# (NOTE(review): predict(type = "probs") would be more informative).
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova0_0),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = FALSE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
# FIX: axis label had a copy-paste error ("0_0-Specificity").
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0_0), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = FALSE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "0_0-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0_0) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.6749
# Model 5: de-aged thickness alone (no Age covariate).
m.multi.nova5 <-
multinom(Diagnostic ~ logAvgThickness_age_decay, data = train.data)
## # weights: 9 (4 variable)
## initial value 213.130784
## iter 10 value 166.981159
## iter 20 value 164.067947
## iter 30 value 164.002188
## iter 40 value 163.992761
## iter 50 value 163.991594
## final value 163.991181
## converged
# Coefficient/SE table for model 5.
stargazer(m.multi.nova5, type = "text")
##
## ======================================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ------------------------------------------------------
## logAvgThickness_age_decay -65.069*** -21.354*
## (20.336) (12.433)
##
## Constant 25.966*** 8.343
## (8.610) (5.347)
##
## ------------------------------------------------------
## Akaike Inf. Crit. 335.982 335.982
## ======================================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 5 coefficients.
z5 <-
summary(m.multi.nova5)$coefficients / summary(m.multi.nova5)$standard.errors
p5 <- (1 - pnorm(abs(z5), 0, 1)) * 2
t(p5)
## AD MCI
## (Intercept) 0.002563094 0.11869180
## logAvgThickness_age_decay 0.001375992 0.08590002
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi5 = exp(coef(m.multi.nova5))
t(coef.multi5)
## AD MCI
## (Intercept) 1.891884e+11 4.201590e+03
## logAvgThickness_age_decay 5.508254e-29 5.323692e-10
# Predictions on the held-out test set (note: predicts CTL for everyone).
predicted.classes.multi.nova5 <- m.multi.nova5 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova5 == test.data$Diagnostic)
## [1] 0.6153846
# Summary
confusionMatrix(predicted.classes.multi.nova5, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 32 6 14
## AD 0 0 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6154
## 95% CI : (0.4702, 0.747)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.5608
##
## Kappa : 0
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.0000 0.0000
## Specificity 0.0000 1.0000 1.0000
## Pos Pred Value 0.6154 NaN NaN
## Neg Pred Value NaN 0.8846 0.7308
## Prevalence 0.6154 0.1154 0.2692
## Detection Rate 0.6154 0.0000 0.0000
## Detection Prevalence 1.0000 0.0000 0.0000
## Balanced Accuracy 0.5000 0.5000 0.5000
# ROC: multiclass AUC (Hand & Till) for model 5, from hard class labels
# (NOTE(review): predict(type = "probs") would be more informative).
multiclass.roc(
as.numeric(test.data$Diagnostic),
as.numeric(predicted.classes.multi.nova5),
percent = FALSE, # was `F`, which is reassignable; FALSE is safe
ci.alpha = 0.9,
stratified = FALSE,
plot = TRUE,
grid = TRUE,
legacy.axes = TRUE,
reuse.auc = TRUE,
print.auc = TRUE,
print.thres.col = "blue",
ci.type = "bars",
print.thres.cex = 0.7,
main = "ROC curve",
ylab = "Sensitivity (true positive rate)",
xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova5), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova5) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Model 4: de-aged K alone (no Age covariate).
m.multi.nova4 <-
multinom(Diagnostic ~ K_age_decay, data = train.data)
## # weights: 9 (4 variable)
## initial value 213.130784
## iter 10 value 162.404108
## iter 20 value 158.863905
## iter 30 value 158.757985
## iter 40 value 158.752019
## iter 50 value 158.749489
## iter 50 value 158.749488
## iter 50 value 158.749488
## final value 158.749488
## converged
# Coefficient/SE table for model 4.
stargazer(m.multi.nova4, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K_age_decay -95.023*** -17.521
## (22.382) (12.864)
##
## Constant -50.690*** -9.753
## (11.625) (6.544)
##
## ----------------------------------------------
## Akaike Inf. Crit. 325.499 325.499
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for model 4 coefficients.
z4 <-
summary(m.multi.nova4)$coefficients / summary(m.multi.nova4)$standard.errors
p4 <- (1 - pnorm(abs(z4), 0, 1)) * 2
t(p4)
## AD MCI
## (Intercept) 1.298143e-05 0.1361119
## K_age_decay 2.181312e-05 0.1731774
# To ease interpretation: exponentiate log-odds into odds ratios.
coef.multi4 = exp(coef(m.multi.nova4))
t(coef.multi4)
## AD MCI
## (Intercept) 9.676520e-23 5.813932e-05
## K_age_decay 5.397923e-42 2.458456e-08
# Predictions on the held-out test set.
predicted.classes.multi.nova4 <- m.multi.nova4 %>% predict(test.data, type = "class")
#Model accuracy
mean(predicted.classes.multi.nova4 == test.data$Diagnostic)
## [1] 0.6346154
# Summary
confusionMatrix(predicted.classes.multi.nova4, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 32 5 14
## AD 0 1 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6346
## 95% CI : (0.4896, 0.7638)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.4477
##
## Kappa : 0.0732
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.16667 0.0000
## Specificity 0.0500 1.00000 1.0000
## Pos Pred Value 0.6275 1.00000 NaN
## Neg Pred Value 1.0000 0.90196 0.7308
## Prevalence 0.6154 0.11538 0.2692
## Detection Rate 0.6154 0.01923 0.0000
## Detection Prevalence 0.9808 0.01923 0.0000
## Balanced Accuracy 0.5250 0.58333 0.5000
# ROC
# NOTE(review): the AUC is computed from hard class labels (as.numeric of the
# predicted classes), not class probabilities, which caps the multiclass AUC
# near 0.5; consider predict(m.multi.nova4, test.data, type = "probs").
multiclass.roc(
  as.numeric(test.data$Diagnostic),
  as.numeric(predicted.classes.multi.nova4),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova4), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova4) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Multinomial logistic model (hemisphere data): Diagnostic ~ S_age_decay,
# fitted on the training split with CTL as the reference level.
m.multi.nova13 <- multinom(Diagnostic ~ S_age_decay, data = train.data)
## # weights: 9 (4 variable)
## initial value 213.130784
## iter 10 value 170.177088
## final value 168.951934
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova13, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## S_age_decay 2.756 2.791
## (2.524) (1.738)
##
## Constant -26.879 -26.242*
## (22.978) (15.818)
##
## ----------------------------------------------
## Akaike Inf. Crit. 345.904 345.904
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z13 <-
  summary(m.multi.nova13)$coefficients / summary(m.multi.nova13)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p13 <- 2 * pnorm(abs(z13), lower.tail = FALSE)
t(p13)
## AD MCI
## (Intercept) 0.2421046 0.09711786
## S_age_decay 0.2747228 0.10825828
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi13 <- exp(coef(m.multi.nova13))
t(coef.multi13)
## AD MCI
## (Intercept) 2.122074e-12 4.009299e-12
## S_age_decay 1.574328e+01 1.630259e+01
# Predictions on the held-out test set
predicted.classes.multi.nova13 <- predict(m.multi.nova13, test.data, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova13 == test.data$Diagnostic)
## [1] 0.6153846
# Summary
confusionMatrix(predicted.classes.multi.nova13, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 32 6 14
## AD 0 0 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6154
## 95% CI : (0.4702, 0.747)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.5608
##
## Kappa : 0
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.0000 0.0000
## Specificity 0.0000 1.0000 1.0000
## Pos Pred Value 0.6154 NaN NaN
## Neg Pred Value NaN 0.8846 0.7308
## Prevalence 0.6154 0.1154 0.2692
## Detection Rate 0.6154 0.0000 0.0000
## Detection Prevalence 1.0000 0.0000 0.0000
## Balanced Accuracy 0.5000 0.5000 0.5000
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities,
# which caps the multiclass AUC near 0.5; consider type = "probs" predictions.
multiclass.roc(
  as.numeric(test.data$Diagnostic),
  as.numeric(predicted.classes.multi.nova13),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova13), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova13) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Multinomial logistic model (hemisphere data): Diagnostic ~ I_age_decay,
# fitted on the training split with CTL as the reference level.
m.multi.nova12 <- multinom(Diagnostic ~ I_age_decay, data = train.data)
## # weights: 9 (4 variable)
## initial value 213.130784
## iter 10 value 170.214654
## iter 20 value 167.859002
## iter 30 value 167.718820
## final value 167.702270
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova12, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## I_age_decay -9.790** -1.110
## (4.253) (2.830)
##
## Constant 101.074** 10.828
## (44.650) (29.779)
##
## ----------------------------------------------
## Akaike Inf. Crit. 343.405 343.405
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z12 <-
  summary(m.multi.nova12)$coefficients / summary(m.multi.nova12)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p12 <- 2 * pnorm(abs(z12), lower.tail = FALSE)
t(p12)
## AD MCI
## (Intercept) 0.02359077 0.7161469
## I_age_decay 0.02134871 0.6948876
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi12 <- exp(coef(m.multi.nova12))
t(coef.multi12)
## AD MCI
## (Intercept) 7.872041e+43 5.041702e+04
## I_age_decay 5.601609e-05 3.295799e-01
# Predictions on the held-out test set
predicted.classes.multi.nova12 <- predict(m.multi.nova12, test.data, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova12 == test.data$Diagnostic)
## [1] 0.6153846
# Summary
confusionMatrix(predicted.classes.multi.nova12, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 32 6 14
## AD 0 0 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6154
## 95% CI : (0.4702, 0.747)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.5608
##
## Kappa : 0
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.0000 0.0000
## Specificity 0.0000 1.0000 1.0000
## Pos Pred Value 0.6154 NaN NaN
## Neg Pred Value NaN 0.8846 0.7308
## Prevalence 0.6154 0.1154 0.2692
## Detection Rate 0.6154 0.0000 0.0000
## Detection Prevalence 1.0000 0.0000 0.0000
## Balanced Accuracy 0.5000 0.5000 0.5000
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities,
# which caps the multiclass AUC near 0.5; consider type = "probs" predictions.
multiclass.roc(
  as.numeric(test.data$Diagnostic),
  as.numeric(predicted.classes.multi.nova12),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova12), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova12) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Combined multinomial model (hemisphere data): all three age-decay folding
# variables as predictors, fitted on the training split (CTL reference).
m.multi.nova0_0_0 <- multinom(
  Diagnostic ~ K_age_decay + I_age_decay + S_age_decay,
  data = train.data
)
## # weights: 15 (8 variable)
## initial value 213.130784
## iter 10 value 164.287320
## iter 20 value 156.923565
## iter 30 value 155.767254
## iter 40 value 155.272125
## iter 50 value 155.110763
## iter 60 value 155.046041
## iter 70 value 154.987246
## iter 80 value 154.944244
## final value 154.934852
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova0_0_0, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K_age_decay -93.732*** -10.399
## (24.627) (13.874)
##
## I_age_decay -10.519** -2.719
## (4.895) (3.098)
##
## S_age_decay 2.694 2.649
## (3.598) (2.045)
##
## Constant 36.004 -1.610
## (47.208) (30.000)
##
## ----------------------------------------------
## Akaike Inf. Crit. 325.870 325.870
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z0_0_0 <-
  summary(m.multi.nova0_0_0)$coefficients / summary(m.multi.nova0_0_0)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p0_0_0 <- 2 * pnorm(abs(z0_0_0), lower.tail = FALSE)
t(p0_0_0)
## AD MCI
## (Intercept) 0.4456633498 0.9572083
## K_age_decay 0.0001411757 0.4535389
## I_age_decay 0.0316449675 0.3801813
## S_age_decay 0.4540619565 0.1952318
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi0_0_0 <- exp(coef(m.multi.nova0_0_0))
t(coef.multi0_0_0)
## AD MCI
## (Intercept) 4.329343e+15 1.999447e-01
## K_age_decay 1.962109e-41 3.047167e-05
## I_age_decay 2.702717e-05 6.595267e-02
## S_age_decay 1.478523e+01 1.413660e+01
# Predictions on the held-out test set
predicted.classes.multi.nova0_0_0 <- predict(m.multi.nova0_0_0, test.data, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova0_0_0 == test.data$Diagnostic)
## [1] 0.6346154
# Summary
confusionMatrix(predicted.classes.multi.nova0_0_0, test.data$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 32 5 14
## AD 0 1 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6346
## 95% CI : (0.4896, 0.7638)
## No Information Rate : 0.6154
## P-Value [Acc > NIR] : 0.4477
##
## Kappa : 0.0732
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.16667 0.0000
## Specificity 0.0500 1.00000 1.0000
## Pos Pred Value 0.6275 1.00000 NaN
## Neg Pred Value 1.0000 0.90196 0.7308
## Prevalence 0.6154 0.11538 0.2692
## Detection Rate 0.6154 0.01923 0.0000
## Detection Prevalence 0.9808 0.01923 0.0000
## Balanced Accuracy 0.5250 0.58333 0.5000
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities,
# which caps the multiclass AUC near 0.5; consider type = "probs" predictions.
multiclass.roc(
  as.numeric(test.data$Diagnostic),
  as.numeric(predicted.classes.multi.nova0_0_0),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0_0_0), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0_0_0) with 3 levels of as.numeric(test.data$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Lobe-level analysis: keep only the temporal ROI and make CTL the reference
# level of Diagnostic so model coefficients contrast AD/MCI against controls.
dados_lobos_v1_T <- filter(dados_lobos_v1, ROI == "T")
dados_lobos_v1_T$Diagnostic <- relevel(dados_lobos_v1_T$Diagnostic, "CTL")
# Subject-level train/test split reusing the previously sampled subj.training.
# NOTE(review): anti_join keeps rows NOT matching subj.training as "train" and
# semi_join keeps the matches as "test" -- confirm this direction is intended
# (counts below: 97 train / 26 test subjects). The joins use all common
# columns by default; an explicit by = argument would be safer.
train.data_lobes <- anti_join(dados_lobos_v1_T, subj.training)
test.data_lobes <- semi_join(dados_lobos_v1_T, subj.training)
# Box plots of the candidate predictors by diagnostic group (full sample).
caret::featurePlot(x = dados_lobos_v1_T[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = dados_lobos_v1_T$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
# Sanity checks on the number of unique subjects in each split.
print(n_distinct(dados_lobos_v1_T$SUBJ))
## [1] 123
print(n_distinct(train.data_lobes$SUBJ))
## [1] 97
print(n_distinct(test.data_lobes$SUBJ))
## [1] 26
# Same box plots restricted to the training split.
caret::featurePlot(x = train.data_lobes[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = train.data_lobes$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
# Lobe model 1: Diagnostic ~ K_corrected + Age on the temporal-lobe training
# split (CTL reference level).
multinom1.l <- multinom(Diagnostic ~ K_corrected + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 141.575398
## iter 20 value 140.157003
## iter 30 value 136.694806
## iter 40 value 136.396273
## iter 50 value 136.336741
## iter 60 value 136.305878
## iter 70 value 136.298883
## iter 80 value 136.289140
## iter 80 value 136.289140
## final value 136.289134
## converged
# Coefficients, standard errors, residual deviance and AIC of the fit.
summary(multinom1.l)
## Call:
## multinom(formula = Diagnostic ~ K_corrected + Age, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_corrected Age
## AD -43.42394 -40.17592 0.2878773
## MCI -16.12119 -15.71296 0.1050940
##
## Std. Errors:
## (Intercept) K_corrected Age
## AD 8.917722 16.223212 0.06919745
## MCI 4.875215 9.573517 0.03211611
##
## Residual Deviance: 272.5783
## AIC: 284.5783
# NOTE(review): this refits exactly the same model as multinom1.l above
# (identical formula, data and iteration trace); consider reusing that object
# instead of fitting twice.
m.multi.nova1.l <-
multinom(Diagnostic ~ K_corrected + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 141.575398
## iter 20 value 140.157003
## iter 30 value 136.694806
## iter 40 value 136.396273
## iter 50 value 136.336741
## iter 60 value 136.305878
## iter 70 value 136.298883
## iter 80 value 136.289140
## iter 80 value 136.289140
## final value 136.289134
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova1.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K_corrected -40.176** -15.713
## (16.223) (9.574)
##
## Age 0.288*** 0.105***
## (0.069) (0.032)
##
## Constant -43.424*** -16.121***
## (8.918) (4.875)
##
## ----------------------------------------------
## Akaike Inf. Crit. 284.578 284.578
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z1.l <-
  summary(m.multi.nova1.l)$coefficients / summary(m.multi.nova1.l)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p1.l <- 2 * pnorm(abs(z1.l), lower.tail = FALSE)
t(p1.l)
## AD MCI
## (Intercept) 1.119388e-06 0.0009438013
## K_corrected 1.326975e-02 0.1007363564
## Age 3.179273e-05 0.0010667055
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi1.l <- exp(coef(m.multi.nova1.l))
t(coef.multi1.l)
## AD MCI
## (Intercept) 1.384278e-19 9.969114e-08
## K_corrected 3.563037e-18 1.499509e-07
## Age 1.333594e+00 1.110815e+00
# Predictions on the held-out lobe test set
predicted.classes.multi.nova1.l <- predict(m.multi.nova1.l, test.data_lobes, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova1.l == test.data_lobes$Diagnostic)
## [1] 0.627451
# Summary
cM1.l <- confusionMatrix(predicted.classes.multi.nova1.l, test.data_lobes$Diagnostic)
cM1.l
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 29 2 9
## AD 0 2 4
## MCI 2 2 1
##
## Overall Statistics
##
## Accuracy : 0.6275
## 95% CI : (0.4808, 0.7587)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.44711
##
## Kappa : 0.2279
##
## Mcnemar's Test P-Value : 0.06813
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.9355 0.33333 0.07143
## Specificity 0.4500 0.91111 0.89189
## Pos Pred Value 0.7250 0.33333 0.20000
## Neg Pred Value 0.8182 0.91111 0.71739
## Prevalence 0.6078 0.11765 0.27451
## Detection Rate 0.5686 0.03922 0.01961
## Detection Prevalence 0.7843 0.11765 0.09804
## Balanced Accuracy 0.6927 0.62222 0.48166
#cM1.l.t.score <- mutate(cM1.l, Diagnostic = c("AD", "MCI", "CTL"),sensitivity = as.data.frame(cM1.l$byClass[,1]), specificity = as.data.frame(cM1.l$byClass[,2]))
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities;
# consider predict(m.multi.nova1.l, test.data_lobes, type = "probs").
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova1.l),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova1.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova1.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.706
# Lobe model 2: Diagnostic ~ logAvgThickness + Age on the temporal-lobe
# training split (CTL reference level).
multinom2.l <- multinom(Diagnostic ~ logAvgThickness + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 136.015187
## iter 20 value 134.489485
## iter 30 value 129.671781
## iter 40 value 128.743859
## iter 50 value 128.122834
## iter 60 value 127.916315
## iter 70 value 127.888696
## iter 80 value 127.873219
## iter 90 value 127.866424
## final value 127.865462
## converged
# Coefficients, standard errors, residual deviance and AIC of the fit.
summary(multinom2.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness + Age, data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness Age
## AD 14.236634 -82.89300 0.27513007
## MCI 6.076846 -29.83423 0.09138058
##
## Std. Errors:
## (Intercept) logAvgThickness Age
## AD 4.474889 3.108388 0.07002867
## MCI 5.260717 9.641075 0.03258314
##
## Residual Deviance: 255.7309
## AIC: 267.7309
# NOTE(review): this refits exactly the same model as multinom2.l above
# (identical formula, data and iteration trace); consider reusing that object
# instead of fitting twice.
m.multi.nova2.l <-
multinom(Diagnostic ~ logAvgThickness + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 136.015187
## iter 20 value 134.489485
## iter 30 value 129.671781
## iter 40 value 128.743859
## iter 50 value 128.122834
## iter 60 value 127.916315
## iter 70 value 127.888696
## iter 80 value 127.873219
## iter 90 value 127.866424
## final value 127.865462
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova2.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## logAvgThickness -82.893*** -29.834***
## (3.108) (9.641)
##
## Age 0.275*** 0.091***
## (0.070) (0.033)
##
## Constant 14.237*** 6.077
## (4.475) (5.261)
##
## ----------------------------------------------
## Akaike Inf. Crit. 267.731 267.731
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z2.l <-
  summary(m.multi.nova2.l)$coefficients / summary(m.multi.nova2.l)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|
# (the recorded 0.000000e+00 below is an underflow of the old form).
p2.l <- 2 * pnorm(abs(z2.l), lower.tail = FALSE)
t(p2.l)
## AD MCI
## (Intercept) 1.465400e-03 0.248034632
## logAvgThickness 0.000000e+00 0.001971505
## Age 8.536362e-05 0.005038911
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi2.l <- exp(coef(m.multi.nova2.l))
t(coef.multi2.l)
## AD MCI
## (Intercept) 1.523672e+06 4.356528e+02
## logAvgThickness 1.000066e-36 1.104487e-13
## Age 1.316702e+00 1.095686e+00
# Predictions on the held-out lobe test set
predicted.classes.multi.nova2.l <- predict(m.multi.nova2.l, test.data_lobes, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova2.l == test.data_lobes$Diagnostic)
## [1] 0.6862745
# Summary
confusionMatrix(predicted.classes.multi.nova2.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 31 3 9
## AD 0 3 4
## MCI 0 0 1
##
## Overall Statistics
##
## Accuracy : 0.6863
## 95% CI : (0.5411, 0.8089)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.157693
##
## Kappa : 0.3267
##
## Mcnemar's Test P-Value : 0.001134
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.50000 0.07143
## Specificity 0.4000 0.91111 1.00000
## Pos Pred Value 0.7209 0.42857 1.00000
## Neg Pred Value 1.0000 0.93182 0.74000
## Prevalence 0.6078 0.11765 0.27451
## Detection Rate 0.6078 0.05882 0.01961
## Detection Prevalence 0.8431 0.13725 0.01961
## Balanced Accuracy 0.7000 0.70556 0.53571
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities;
# consider type = "probs" predictions for a probability-based ROC.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova2.l),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova2.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova2.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.6607
# Lobe model 4: Diagnostic ~ K_age_decay on the temporal-lobe training split
# (CTL reference level).
multinom4.l <- multinom(Diagnostic ~ K_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 160.199084
## iter 20 value 157.549833
## iter 30 value 157.399255
## iter 40 value 157.379758
## iter 50 value 157.376188
## iter 60 value 157.375429
## final value 157.375345
## converged
# Coefficients, standard errors, residual deviance and AIC of the fit.
summary(multinom4.l)
## Call:
## multinom(formula = Diagnostic ~ K_age_decay, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_age_decay
## AD -24.267287 -65.42324
## MCI -9.872672 -26.65515
##
## Std. Errors:
## (Intercept) K_age_decay
## AD 5.588240 16.00522
## MCI 3.625336 10.66693
##
## Residual Deviance: 314.7507
## AIC: 322.7507
# NOTE(review): this refits exactly the same model as multinom4.l above
# (identical formula, data and iteration trace); consider reusing that object
# instead of fitting twice.
m.multi.nova4.l <-
multinom(Diagnostic ~ K_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 160.199084
## iter 20 value 157.549833
## iter 30 value 157.399255
## iter 40 value 157.379758
## iter 50 value 157.376188
## iter 60 value 157.375429
## final value 157.375345
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova4.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K_age_decay -65.423*** -26.655**
## (16.005) (10.667)
##
## Constant -24.267*** -9.873***
## (5.588) (3.625)
##
## ----------------------------------------------
## Akaike Inf. Crit. 322.751 322.751
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z4.l <-
  summary(m.multi.nova4.l)$coefficients / summary(m.multi.nova4.l)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p4.l <- 2 * pnorm(abs(z4.l), lower.tail = FALSE)
t(p4.l)
## AD MCI
## (Intercept) 1.408296e-05 0.006464437
## K_age_decay 4.358220e-05 0.012459348
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi4.l <- exp(coef(m.multi.nova4.l))
t(coef.multi4.l)
## AD MCI
## (Intercept) 2.889689e-11 5.156477e-05
## K_age_decay 3.864098e-29 2.653467e-12
# Predictions on the held-out lobe test set
predicted.classes.multi.nova4.l <- predict(m.multi.nova4.l, test.data_lobes, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova4.l == test.data_lobes$Diagnostic)
## [1] 0.627451
# Summary
confusionMatrix(predicted.classes.multi.nova4.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 31 5 14
## AD 0 1 0
## MCI 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6275
## 95% CI : (0.4808, 0.7587)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.4471
##
## Kappa : 0.0727
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.16667 0.0000
## Specificity 0.0500 1.00000 1.0000
## Pos Pred Value 0.6200 1.00000 NaN
## Neg Pred Value 1.0000 0.90000 0.7255
## Prevalence 0.6078 0.11765 0.2745
## Detection Rate 0.6078 0.01961 0.0000
## Detection Prevalence 0.9804 0.01961 0.0000
## Balanced Accuracy 0.5250 0.58333 0.5000
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities,
# which caps the multiclass AUC near 0.5; consider type = "probs" predictions.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova4.l),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova4.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova4.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
# Lobe model 5: Diagnostic ~ logAvgThickness_age_decay on the temporal-lobe
# training split (CTL reference level).
multinom5.l <- multinom(Diagnostic ~ logAvgThickness_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 154.724249
## iter 20 value 151.550136
## iter 30 value 151.325486
## iter 40 value 151.291338
## final value 151.285895
## converged
# Coefficients, standard errors, residual deviance and AIC of the fit.
summary(multinom5.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness_age_decay, data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness_age_decay
## AD 37.09813 -81.53162
## MCI 15.28784 -33.37652
##
## Std. Errors:
## (Intercept) logAvgThickness_age_decay
## AD 7.941797 16.83349
## MCI 5.301538 10.98848
##
## Residual Deviance: 302.5718
## AIC: 310.5718
# NOTE(review): this refits exactly the same model as multinom5.l above
# (identical formula, data and iteration trace); consider reusing that object
# instead of fitting twice.
m.multi.nova5.l <-
multinom(Diagnostic ~ logAvgThickness_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 154.724249
## iter 20 value 151.550136
## iter 30 value 151.325486
## iter 40 value 151.291338
## final value 151.285895
## converged
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova5.l, type = "text")
##
## ======================================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ------------------------------------------------------
## logAvgThickness_age_decay -81.532*** -33.377***
## (16.833) (10.988)
##
## Constant 37.098*** 15.288***
## (7.942) (5.302)
##
## ------------------------------------------------------
## Akaike Inf. Crit. 310.572 310.572
## ======================================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics and two-sided p-values for each coefficient.
z5.l <-
  summary(m.multi.nova5.l)$coefficients / summary(m.multi.nova5.l)$standard.errors
# lower.tail = FALSE avoids the cancellation in (1 - pnorm(...)) for large |z|.
p5.l <- 2 * pnorm(abs(z5.l), lower.tail = FALSE)
t(p5.l)
## AD MCI
## (Intercept) 2.993712e-06 0.003930809
## logAvgThickness_age_decay 1.276250e-06 0.002386216
# To ease interpretation: exponentiate the coefficients (odds ratios).
coef.multi5.l <- exp(coef(m.multi.nova5.l))
t(coef.multi5.l)
## AD MCI
## (Intercept) 1.292742e+16 4.359390e+06
## logAvgThickness_age_decay 3.901826e-36 3.197153e-15
# Predictions on the held-out lobe test set
predicted.classes.multi.nova5.l <- predict(m.multi.nova5.l, test.data_lobes, type = "class")
# Model accuracy
mean(predicted.classes.multi.nova5.l == test.data_lobes$Diagnostic)
## [1] 0.6470588
# Summary
confusionMatrix(predicted.classes.multi.nova5.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 31 5 12
## AD 0 1 1
## MCI 0 0 1
##
## Overall Statistics
##
## Accuracy : 0.6471
## 95% CI : (0.5007, 0.7757)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.3368468
##
## Kappa : 0.1555
##
## Mcnemar's Test P-Value : 0.0004398
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 1.0000 0.16667 0.07143
## Specificity 0.1500 0.97778 1.00000
## Pos Pred Value 0.6458 0.50000 1.00000
## Neg Pred Value 1.0000 0.89796 0.74000
## Prevalence 0.6078 0.11765 0.27451
## Detection Rate 0.6078 0.01961 0.01961
## Detection Prevalence 0.9412 0.03922 0.01961
## Balanced Accuracy 0.5750 0.57222 0.53571
# ROC
# NOTE(review): AUC is computed from hard class labels, not probabilities;
# consider type = "probs" predictions for a probability-based ROC.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova5.l),
  percent = FALSE,
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova5.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova5.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5496
# Lobe model 0: Diagnostic ~ K_corrected + Age + ESC (education) on the
# temporal-lobe training split (CTL reference level).
# NOTE(review): the optimizer hit the default iteration cap ("stopped after
# 100 iterations" in the trace below) -- this fit did NOT converge. Refit with
# multinom(..., maxit = 500) and re-check coefficients and predictions.
multinom0.l <- multinom(Diagnostic ~ K_corrected + Age + ESC, data = train.data_lobes)
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 133.159250
## iter 20 value 129.925225
## iter 30 value 127.989850
## iter 40 value 126.403215
## iter 50 value 125.931577
## iter 60 value 125.672273
## iter 70 value 125.531204
## iter 80 value 125.472788
## iter 90 value 125.397238
## iter 100 value 125.382025
## final value 125.382025
## stopped after 100 iterations
# Coefficients, standard errors, residual deviance and AIC of the fit.
summary(multinom0.l)
## Call:
## multinom(formula = Diagnostic ~ K_corrected + Age + ESC, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_corrected Age ESC
## AD -36.39525 -52.84894 0.20199625 -0.5360482
## MCI -11.59963 -19.55334 0.07129166 -0.2803732
##
## Std. Errors:
## (Intercept) K_corrected Age ESC
## AD 4.885115 3.260441 0.07226061 0.13001756
## MCI 4.386738 8.728306 0.03255077 0.08451055
##
## Residual Deviance: 250.7641
## AIC: 266.7641
# NOTE(review): this refits exactly the same model as multinom0.l above;
# consider reusing that object instead of fitting twice. Also note the trace
# ends with "stopped after 100 iterations" -- the fit did NOT converge;
# refit with multinom(..., maxit = 500) before trusting these estimates.
m.multi.nova0.l <-
multinom(Diagnostic ~ K_corrected + Age + ESC, data = train.data_lobes)
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 133.159250
## iter 20 value 129.925225
## iter 30 value 127.989850
## iter 40 value 126.403215
## iter 50 value 125.931577
## iter 60 value 125.672273
## iter 70 value 125.531204
## iter 80 value 125.472788
## iter 90 value 125.397238
## iter 100 value 125.382025
## final value 125.382025
## stopped after 100 iterations
# Text table of coefficients (AD and MCI vs. the CTL reference).
stargazer(m.multi.nova0.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## K_corrected -52.849*** -19.553**
## (3.260) (8.728)
##
## Age 0.202*** 0.071**
## (0.072) (0.033)
##
## ESC -0.536*** -0.280***
## (0.130) (0.085)
##
## Constant -36.395*** -11.600***
## (4.885) (4.387)
##
## ----------------------------------------------
## Akaike Inf. Crit. 266.764 266.764
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
z0.l <-
summary(m.multi.nova0.l)$coefficients / summary(m.multi.nova0.l)$standard.errors
p0.l <- (1 - pnorm(abs(z0.l), 0, 1)) * 2
t(p0.l)
## AD MCI
## (Intercept) 9.325873e-14 0.0081872104
## K_corrected 0.000000e+00 0.0250765183
## Age 5.183787e-03 0.0285120425
## ESC 3.741465e-05 0.0009079076
coef.multi0.l = exp(coef(m.multi.nova0.l))
t(coef.multi0.l)
## AD MCI
## (Intercept) 1.562223e-16 9.169462e-06
## K_corrected 1.116851e-23 3.221746e-09
## Age 1.223843e+00 1.073894e+00
## ESC 5.850557e-01 7.555018e-01
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova0.l <- m.multi.nova0.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova0.l == test.data_lobes$Diagnostic)
## [1] 0.6666667
# Summary
# Confusion matrix plus per-class sensitivity/specificity (caret).
confusionMatrix(predicted.classes.multi.nova0.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 27 3 2
## AD 0 0 5
## MCI 4 3 7
##
## Overall Statistics
##
## Accuracy : 0.6667
## 95% CI : (0.5208, 0.7924)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.2384
##
## Kappa : 0.3731
##
## Mcnemar's Test P-Value : 0.2440
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.8710 0.00000 0.5000
## Specificity 0.7500 0.88889 0.8108
## Pos Pred Value 0.8438 0.00000 0.5000
## Neg Pred Value 0.7895 0.86957 0.8108
## Prevalence 0.6078 0.11765 0.2745
## Detection Rate 0.5294 0.00000 0.1373
## Detection Prevalence 0.6275 0.09804 0.2745
## Balanced Accuracy 0.8105 0.44444 0.6554
#ROC
# Multiclass AUC (Hand & Till) on the test set.
# NOTE(review): computed from hard class labels coerced to numeric, not from
# class probabilities; predict(..., type = "probs") would give a richer ROC.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova0.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.7053
###LogT + Age + YS
# Multinomial logit: Diagnostic ~ logAvgThickness + Age + ESC (training set).
multinom0_2.l <- multinom(Diagnostic ~ logAvgThickness + Age + ESC, data = train.data_lobes)
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 130.024743
## iter 20 value 126.633979
## iter 30 value 123.999813
## iter 40 value 120.794615
## iter 50 value 120.084969
## iter 60 value 119.740364
## iter 70 value 119.393383
## iter 80 value 119.273223
## iter 90 value 119.045392
## iter 100 value 118.991985
## final value 118.991985
## stopped after 100 iterations
summary(multinom0_2.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness + Age + ESC,
## data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness Age ESC
## AD 28.61174 -88.60259 0.20756564 -0.5185432
## MCI 11.18226 -28.95656 0.06542288 -0.2510547
##
## Std. Errors:
## (Intercept) logAvgThickness Age ESC
## AD 5.007258 3.313346 0.07327305 0.13796034
## MCI 5.623284 9.779241 0.03278654 0.08433308
##
## Residual Deviance: 237.984
## AIC: 253.984
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova0_2.l <- multinom0_2.l
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 130.024743
## iter 20 value 126.633979
## iter 30 value 123.999813
## iter 40 value 120.794615
## iter 50 value 120.084969
## iter 60 value 119.740364
## iter 70 value 119.393383
## iter 80 value 119.273223
## iter 90 value 119.045392
## iter 100 value 118.991985
## final value 118.991985
## stopped after 100 iterations
stargazer(m.multi.nova0_2.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## AD MCI
## (1) (2)
## ----------------------------------------------
## logAvgThickness -88.603*** -28.957***
## (3.313) (9.779)
##
## Age 0.208*** 0.065**
## (0.073) (0.033)
##
## ESC -0.519*** -0.251***
## (0.138) (0.084)
##
## Constant 28.612*** 11.182**
## (5.007) (5.623)
##
## ----------------------------------------------
## Akaike Inf. Crit. 253.984 253.984
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry0_2.l <- summary(m.multi.nova0_2.l)
z0_2.l <- smry0_2.l$coefficients / smry0_2.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids (1 - pnorm(abs(z))) * 2
# underflowing to exactly 0 for large |z| (see recorded output below).
p0_2.l <- 2 * pnorm(abs(z0_2.l), lower.tail = FALSE)
t(p0_2.l)
## AD MCI
## (Intercept) 1.103160e-08 0.046749408
## logAvgThickness 0.000000e+00 0.003066186
## Age 4.614669e-03 0.045997180
## ESC 1.708395e-04 0.002911391
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi0_2.l <- exp(coef(m.multi.nova0_2.l))
t(coef.multi0_2.l)
## AD MCI
## (Intercept) 2.666383e+12 7.184434e+04
## logAvgThickness 3.314250e-39 2.656594e-13
## Age 1.230678e+00 1.067610e+00
## ESC 5.953873e-01 7.779798e-01
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova0_2.l <- m.multi.nova0_2.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova0_2.l == test.data_lobes$Diagnostic)
## [1] 0.5686275
# Summary
# Confusion matrix plus per-class sensitivity/specificity (caret).
confusionMatrix(predicted.classes.multi.nova0_2.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction CTL AD MCI
## CTL 25 1 8
## AD 0 1 3
## MCI 6 4 3
##
## Overall Statistics
##
## Accuracy : 0.5686
## 95% CI : (0.4225, 0.7065)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.7647
##
## Kappa : 0.1633
##
## Mcnemar's Test P-Value : 0.6989
##
## Statistics by Class:
##
## Class: CTL Class: AD Class: MCI
## Sensitivity 0.8065 0.16667 0.21429
## Specificity 0.5500 0.93333 0.72973
## Pos Pred Value 0.7353 0.25000 0.23077
## Neg Pred Value 0.6471 0.89362 0.71053
## Prevalence 0.6078 0.11765 0.27451
## Detection Rate 0.4902 0.01961 0.05882
## Detection Prevalence 0.6667 0.07843 0.25490
## Balanced Accuracy 0.6782 0.55000 0.47201
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova0_2.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0_2.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0_2.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.7188
# Restrict to the parietal ROI ("P") and set AD as the reference level of
# Diagnostic for the multinomial fits below.
dados_lobos_v1_P <- filter(dados_lobos_v1, ROI == "P")
dados_lobos_v1_P$Diagnostic <- factor(dados_lobos_v1_P$Diagnostic, levels = c("AD", "MCI","CTL"))
# NOTE(review): train = anti_join(..., subj.training) keeps subjects NOT in
# subj.training — confirm subj.training actually lists held-out subjects, as
# its name suggests the opposite. Joins rely on natural keys (no by=).
train.data_lobes <- anti_join(dados_lobos_v1_P, subj.training)
test.data_lobes <- semi_join(dados_lobos_v1_P, subj.training)
# Box plots of the candidate predictors by diagnostic group (full sample).
caret::featurePlot(x = dados_lobos_v1_P[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = dados_lobos_v1_P$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
# Distinct-subject counts: total, training split, test split.
print(n_distinct(dados_lobos_v1_P$SUBJ))
## [1] 123
print(n_distinct(train.data_lobes$SUBJ))
## [1] 97
print(n_distinct(test.data_lobes$SUBJ))
## [1] 26
# Same box plots restricted to the training split.
caret::featurePlot(x = train.data_lobes[, c("K", "logAvgThickness", "K_age_decay", "logAvgThickness_age_decay")], y = train.data_lobes$Diagnostic, plot = "box", scales = list(y = list(relation = "free"), x = list(rot = 90)), layout = c(4, 1))
multinom1.l <- multinom(Diagnostic ~ K_corrected + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 141.267129
## iter 20 value 138.460587
## iter 30 value 136.975934
## iter 40 value 136.208586
## iter 50 value 135.793166
## iter 60 value 135.565539
## iter 70 value 135.425613
## iter 80 value 135.319929
## iter 90 value 135.270924
## iter 100 value 135.234107
## final value 135.234107
## stopped after 100 iterations
summary(multinom1.l)
## Call:
## multinom(formula = Diagnostic ~ K_corrected + Age, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_corrected Age
## MCI 32.62419 41.41588 -0.1353049
## CTL 40.78570 38.92756 -0.2590214
##
## Std. Errors:
## (Intercept) K_corrected Age
## MCI 4.215444 4.897229 0.06421664
## CTL 4.549258 4.656325 0.06446859
##
## Residual Deviance: 270.4682
## AIC: 282.4682
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova1.l <- multinom1.l
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 141.267129
## iter 20 value 138.460587
## iter 30 value 136.975934
## iter 40 value 136.208586
## iter 50 value 135.793166
## iter 60 value 135.565539
## iter 70 value 135.425613
## iter 80 value 135.319929
## iter 90 value 135.270924
## iter 100 value 135.234107
## final value 135.234107
## stopped after 100 iterations
stargazer(m.multi.nova1.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ----------------------------------------------
## K_corrected 41.416*** 38.928***
## (4.897) (4.656)
##
## Age -0.135** -0.259***
## (0.064) (0.064)
##
## Constant 32.624*** 40.786***
## (4.215) (4.549)
##
## ----------------------------------------------
## Akaike Inf. Crit. 282.468 282.468
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry1.l <- summary(m.multi.nova1.l)
z1.l <- smry1.l$coefficients / smry1.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids (1 - pnorm(abs(z))) * 2
# underflowing to exactly 0 for large |z| (see 0.000000e+00 entries below).
p1.l <- 2 * pnorm(abs(z1.l), lower.tail = FALSE)
t(p1.l)
## MCI CTL
## (Intercept) 9.992007e-15 0.000000e+00
## K_corrected 0.000000e+00 0.000000e+00
## Age 3.511694e-02 5.874584e-05
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi1.l <- exp(coef(m.multi.nova1.l))
t(coef.multi1.l)
## MCI CTL
## (Intercept) 1.474021e+14 5.164209e+17
## K_corrected 9.698138e+17 8.054249e+16
## Age 8.734495e-01 7.718065e-01
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova1.l <- m.multi.nova1.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova1.l == test.data_lobes$Diagnostic)
## [1] 0.7254902
# Summary
# Confusion matrix kept in cM1.l for reuse; printed below.
cM1.l <- confusionMatrix(predicted.classes.multi.nova1.l, test.data_lobes$Diagnostic)
cM1.l
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 3 1 1
## MCI 1 5 1
## CTL 2 8 29
##
## Overall Statistics
##
## Accuracy : 0.7255
## 95% CI : (0.5826, 0.8411)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.05502
##
## Kappa : 0.4351
##
## Mcnemar's Test P-Value : 0.12294
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.50000 0.35714 0.9355
## Specificity 0.95556 0.94595 0.5000
## Pos Pred Value 0.60000 0.71429 0.7436
## Neg Pred Value 0.93478 0.79545 0.8333
## Prevalence 0.11765 0.27451 0.6078
## Detection Rate 0.05882 0.09804 0.5686
## Detection Prevalence 0.09804 0.13725 0.7647
## Balanced Accuracy 0.72778 0.65154 0.7177
#cM1.l.t.score <- mutate(cM1.l, Diagnostic = c("AD", "MCI", "CTL"),sensitivity = as.data.frame(cM1.l$byClass[,1]), specificity = as.data.frame(cM1.l$byClass[,2]))
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova1.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova1.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova1.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.7288
multinom2.l <- multinom(Diagnostic ~ logAvgThickness + Age, data = train.data_lobes)
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 140.112316
## iter 20 value 140.109087
## iter 30 value 140.107806
## iter 40 value 140.107086
## iter 40 value 140.107086
## final value 140.107086
## converged
summary(multinom2.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness + Age, data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness Age
## MCI 14.16578 6.695053 -0.2093745
## CTL 21.08276 11.418877 -0.3216516
##
## Std. Errors:
## (Intercept) logAvgThickness Age
## MCI 4.697883 5.942391 0.06054105
## CTL 4.728078 6.004256 0.06203983
##
## Residual Deviance: 280.2142
## AIC: 292.2142
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova2.l <- multinom2.l
## # weights: 12 (6 variable)
## initial value 209.834947
## iter 10 value 140.112316
## iter 20 value 140.109087
## iter 30 value 140.107806
## iter 40 value 140.107086
## iter 40 value 140.107086
## final value 140.107086
## converged
stargazer(m.multi.nova2.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ----------------------------------------------
## logAvgThickness 6.695 11.419*
## (5.942) (6.004)
##
## Age -0.209*** -0.322***
## (0.061) (0.062)
##
## Constant 14.166*** 21.083***
## (4.698) (4.728)
##
## ----------------------------------------------
## Akaike Inf. Crit. 292.214 292.214
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry2.l <- summary(m.multi.nova2.l)
z2.l <- smry2.l$coefficients / smry2.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids the cancellation/underflow
# of (1 - pnorm(abs(z))) * 2 for large |z|.
p2.l <- 2 * pnorm(abs(z2.l), lower.tail = FALSE)
t(p2.l)
## MCI CTL
## (Intercept) 0.0025667970 8.232205e-06
## logAvgThickness 0.2598862925 5.719767e-02
## Age 0.0005434175 2.164803e-07
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi2.l <- exp(coef(m.multi.nova2.l))
t(coef.multi2.l)
## MCI CTL
## (Intercept) 1.419450e+06 1.432598e+09
## logAvgThickness 8.083971e+02 9.102385e+04
## Age 8.110915e-01 7.249507e-01
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova2.l <- m.multi.nova2.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova2.l == test.data_lobes$Diagnostic)
## [1] 0.6470588
# Summary
# Confusion matrix plus per-class sensitivity/specificity (caret).
confusionMatrix(predicted.classes.multi.nova2.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 4 4 2
## MCI 0 0 0
## CTL 2 10 29
##
## Overall Statistics
##
## Accuracy : 0.6471
## 95% CI : (0.5007, 0.7757)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.336847
##
## Kappa : 0.2772
##
## Mcnemar's Test P-Value : 0.002905
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.66667 0.0000 0.9355
## Specificity 0.86667 1.0000 0.4000
## Pos Pred Value 0.40000 NaN 0.7073
## Neg Pred Value 0.95122 0.7255 0.8000
## Prevalence 0.11765 0.2745 0.6078
## Detection Rate 0.07843 0.0000 0.5686
## Detection Prevalence 0.19608 0.0000 0.8039
## Balanced Accuracy 0.76667 0.5000 0.6677
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova2.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova2.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova2.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.7007
multinom4.l <- multinom(Diagnostic ~ K_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 166.495736
## iter 20 value 163.713324
## iter 30 value 162.929041
## iter 40 value 162.585252
## iter 50 value 162.482038
## iter 60 value 162.444256
## final value 162.437240
## converged
summary(multinom4.l)
## Call:
## multinom(formula = Diagnostic ~ K_age_decay, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_age_decay
## MCI 14.74292 40.87597
## CTL 15.54866 40.72614
##
## Std. Errors:
## (Intercept) K_age_decay
## MCI 4.941796 14.50007
## CTL 4.519881 13.18511
##
## Residual Deviance: 324.8745
## AIC: 332.8745
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova4.l <- multinom4.l
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 166.495736
## iter 20 value 163.713324
## iter 30 value 162.929041
## iter 40 value 162.585252
## iter 50 value 162.482038
## iter 60 value 162.444256
## final value 162.437240
## converged
stargazer(m.multi.nova4.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ----------------------------------------------
## K_age_decay 40.876*** 40.726***
## (14.500) (13.185)
##
## Constant 14.743*** 15.549***
## (4.942) (4.520)
##
## ----------------------------------------------
## Akaike Inf. Crit. 332.874 332.874
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry4.l <- summary(m.multi.nova4.l)
z4.l <- smry4.l$coefficients / smry4.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids the cancellation/underflow
# of (1 - pnorm(abs(z))) * 2 for large |z|.
p4.l <- 2 * pnorm(abs(z4.l), lower.tail = FALSE)
t(p4.l)
## MCI CTL
## (Intercept) 0.002851474 0.0005815886
## K_age_decay 0.004817077 0.0020096769
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi4.l <- exp(coef(m.multi.nova4.l))
t(coef.multi4.l)
## MCI CTL
## (Intercept) 2.527948e+06 5.658420e+06
## K_age_decay 5.652091e+17 4.865632e+17
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova4.l <- m.multi.nova4.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova4.l == test.data_lobes$Diagnostic)
## [1] 0.6078431
# Summary
# Confusion matrix: note this model predicts CTL for every test subject.
confusionMatrix(predicted.classes.multi.nova4.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 0 0 0
## MCI 0 0 0
## CTL 6 14 31
##
## Overall Statistics
##
## Accuracy : 0.6078
## 95% CI : (0.4611, 0.7416)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.5609
##
## Kappa : 0
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.0000 0.0000 1.0000
## Specificity 1.0000 1.0000 0.0000
## Pos Pred Value NaN NaN 0.6078
## Neg Pred Value 0.8824 0.7255 NaN
## Prevalence 0.1176 0.2745 0.6078
## Detection Rate 0.0000 0.0000 0.6078
## Detection Prevalence 0.0000 0.0000 1.0000
## Balanced Accuracy 0.5000 0.5000 0.5000
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova4.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova4.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova4.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
multinom5.l <- multinom(Diagnostic ~ logAvgThickness_age_decay, data = train.data_lobes)
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 167.905676
## iter 20 value 167.694099
## iter 30 value 167.608259
## iter 40 value 167.555311
## iter 50 value 167.529721
## iter 60 value 167.514570
## final value 167.510532
## converged
summary(multinom5.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness_age_decay, data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness_age_decay
## MCI -2.753920 9.139153
## CTL -5.044509 16.896897
##
## Std. Errors:
## (Intercept) logAvgThickness_age_decay
## MCI 7.397744 18.32682
## CTL 6.800633 16.84847
##
## Residual Deviance: 335.0211
## AIC: 343.0211
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova5.l <- multinom5.l
## # weights: 9 (4 variable)
## initial value 209.834947
## iter 10 value 167.905676
## iter 20 value 167.694099
## iter 30 value 167.608259
## iter 40 value 167.555311
## iter 50 value 167.529721
## iter 60 value 167.514570
## final value 167.510532
## converged
stargazer(m.multi.nova5.l, type = "text")
##
## ======================================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ------------------------------------------------------
## logAvgThickness_age_decay 9.139 16.897
## (18.327) (16.848)
##
## Constant -2.754 -5.045
## (7.398) (6.801)
##
## ------------------------------------------------------
## Akaike Inf. Crit. 343.021 343.021
## ======================================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry5.l <- summary(m.multi.nova5.l)
z5.l <- smry5.l$coefficients / smry5.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids the cancellation/underflow
# of (1 - pnorm(abs(z))) * 2 for large |z|.
p5.l <- 2 * pnorm(abs(z5.l), lower.tail = FALSE)
t(p5.l)
## MCI CTL
## (Intercept) 0.7096957 0.4582264
## logAvgThickness_age_decay 0.6180074 0.3159216
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi5.l <- exp(coef(m.multi.nova5.l))
t(coef.multi5.l)
## MCI CTL
## (Intercept) 6.367776e-02 6.444621e-03
## logAvgThickness_age_decay 9.312871e+03 2.178859e+07
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova5.l <- m.multi.nova5.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova5.l == test.data_lobes$Diagnostic)
## [1] 0.6078431
# Summary
# Confusion matrix: note this model predicts CTL for every test subject.
confusionMatrix(predicted.classes.multi.nova5.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 0 0 0
## MCI 0 0 0
## CTL 6 14 31
##
## Overall Statistics
##
## Accuracy : 0.6078
## 95% CI : (0.4611, 0.7416)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.5609
##
## Kappa : 0
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.0000 0.0000 1.0000
## Specificity 1.0000 1.0000 0.0000
## Pos Pred Value NaN NaN 0.6078
## Neg Pred Value 0.8824 0.7255 NaN
## Prevalence 0.1176 0.2745 0.6078
## Detection Rate 0.0000 0.0000 0.6078
## Detection Prevalence 0.0000 0.0000 1.0000
## Balanced Accuracy 0.5000 0.5000 0.5000
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova5.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova5.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova5.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.5
multinom0.l <- multinom(Diagnostic ~ K_corrected + Age + ESC, data = train.data_lobes)
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 134.356267
## iter 20 value 130.835410
## iter 30 value 126.999669
## iter 40 value 126.229308
## iter 50 value 126.048718
## iter 60 value 126.033128
## iter 70 value 125.993333
## iter 80 value 125.981032
## iter 90 value 125.973457
## iter 100 value 125.966099
## final value 125.966099
## stopped after 100 iterations
summary(multinom0.l)
## Call:
## multinom(formula = Diagnostic ~ K_corrected + Age + ESC, data = train.data_lobes)
##
## Coefficients:
## (Intercept) K_corrected Age ESC
## MCI 33.36283 50.94232 -0.1145390 0.2157759
## CTL 35.28085 47.20934 -0.2117836 0.4720196
##
## Std. Errors:
## (Intercept) K_corrected Age ESC
## MCI 4.821311 4.997858 0.06726226 0.1206859
## CTL 4.941775 4.895750 0.06763127 0.1280171
##
## Residual Deviance: 251.9322
## AIC: 267.9322
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova0.l <- multinom0.l
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 134.356267
## iter 20 value 130.835410
## iter 30 value 126.999669
## iter 40 value 126.229308
## iter 50 value 126.048718
## iter 60 value 126.033128
## iter 70 value 125.993333
## iter 80 value 125.981032
## iter 90 value 125.973457
## iter 100 value 125.966099
## final value 125.966099
## stopped after 100 iterations
stargazer(m.multi.nova0.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ----------------------------------------------
## K_corrected 50.942*** 47.209***
## (4.998) (4.896)
##
## Age -0.115* -0.212***
## (0.067) (0.068)
##
## ESC 0.216* 0.472***
## (0.121) (0.128)
##
## Constant 33.363*** 35.281***
## (4.821) (4.942)
##
## ----------------------------------------------
## Akaike Inf. Crit. 267.932 267.932
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry0.l <- summary(m.multi.nova0.l)
z0.l <- smry0.l$coefficients / smry0.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids (1 - pnorm(abs(z))) * 2
# underflowing to exactly 0 for large |z| (see 0.000000e+00 entries below).
p0.l <- 2 * pnorm(abs(z0.l), lower.tail = FALSE)
t(p0.l)
## MCI CTL
## (Intercept) 4.520606e-12 9.379164e-13
## K_corrected 0.000000e+00 0.000000e+00
## Age 8.859216e-02 1.739485e-03
## ESC 7.378991e-02 2.267707e-04
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi0.l <- exp(coef(m.multi.nova0.l))
t(coef.multi0.l)
## MCI CTL
## (Intercept) 3.085263e+14 2.100285e+15
## K_corrected 1.330352e+22 3.182405e+20
## Age 8.917772e-01 8.091398e-01
## ESC 1.240824e+00 1.603229e+00
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova0.l <- m.multi.nova0.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova0.l == test.data_lobes$Diagnostic)
## [1] 0.7058824
# Summary
# Confusion matrix plus per-class sensitivity/specificity (caret).
confusionMatrix(predicted.classes.multi.nova0.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 0 1 1
## MCI 1 11 5
## CTL 5 2 25
##
## Overall Statistics
##
## Accuracy : 0.7059
## 95% CI : (0.5617, 0.8251)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.0969
##
## Kappa : 0.4371
##
## Mcnemar's Test P-Value : 0.2667
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.00000 0.7857 0.8065
## Specificity 0.95556 0.8378 0.6500
## Pos Pred Value 0.00000 0.6471 0.7812
## Neg Pred Value 0.87755 0.9118 0.6842
## Prevalence 0.11765 0.2745 0.6078
## Detection Rate 0.00000 0.2157 0.4902
## Detection Prevalence 0.03922 0.3333 0.6275
## Balanced Accuracy 0.47778 0.8118 0.7282
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova0.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.72
###LogT + Age + YS
# Multinomial logit: Diagnostic ~ logAvgThickness + Age + ESC (AD reference).
multinom0_2.l <- multinom(Diagnostic ~ logAvgThickness + Age + ESC, data = train.data_lobes)
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 133.888425
## iter 20 value 131.025158
## iter 20 value 131.025157
## iter 30 value 130.994199
## iter 40 value 130.944127
## iter 50 value 130.936848
## final value 130.936690
## converged
summary(multinom0_2.l)
## Call:
## multinom(formula = Diagnostic ~ logAvgThickness + Age + ESC,
## data = train.data_lobes)
##
## Coefficients:
## (Intercept) logAvgThickness Age ESC
## MCI 8.335469 11.85025 -0.1896330 0.1987492
## CTL 10.425206 15.45735 -0.2797792 0.4501467
##
## Std. Errors:
## (Intercept) logAvgThickness Age ESC
## MCI 5.000568 6.140269 0.06270940 0.1114571
## CTL 5.016519 6.093216 0.06429726 0.1204567
##
## Residual Deviance: 261.8734
## AIC: 277.8734
# Reuse the model already fitted above (identical formula and data) instead of
# refitting it — avoids a redundant optimization run and duplicated output.
m.multi.nova0_2.l <- multinom0_2.l
## # weights: 15 (8 variable)
## initial value 209.834947
## iter 10 value 133.888425
## iter 20 value 131.025158
## iter 20 value 131.025157
## iter 30 value 130.994199
## iter 40 value 130.944127
## iter 50 value 130.936848
## final value 130.936690
## converged
stargazer(m.multi.nova0_2.l, type = "text")
##
## ==============================================
## Dependent variable:
## ----------------------------
## MCI CTL
## (1) (2)
## ----------------------------------------------
## logAvgThickness 11.850* 15.457**
## (6.140) (6.093)
##
## Age -0.190*** -0.280***
## (0.063) (0.064)
##
## ESC 0.199* 0.450***
## (0.111) (0.120)
##
## Constant 8.335* 10.425**
## (5.001) (5.017)
##
## ----------------------------------------------
## Akaike Inf. Crit. 277.873 277.873
## ==============================================
## Note: *p<0.1; **p<0.05; ***p<0.01
# Wald z statistics (coefficient / SE); summary() computed once, not twice.
smry0_2.l <- summary(m.multi.nova0_2.l)
z0_2.l <- smry0_2.l$coefficients / smry0_2.l$standard.errors
# Two-tailed p-values; lower.tail = FALSE avoids the cancellation/underflow
# of (1 - pnorm(abs(z))) * 2 for large |z|.
p0_2.l <- 2 * pnorm(abs(z0_2.l), lower.tail = FALSE)
t(p0_2.l)
## MCI CTL
## (Intercept) 0.095533418 0.0376932119
## logAvgThickness 0.053616249 0.0111866689
## Age 0.002494593 0.0000135309
## ESC 0.074555356 0.0001862297
# Odds ratios (exponentiated coefficients) for easier interpretation.
coef.multi0_2.l <- exp(coef(m.multi.nova0_2.l))
t(coef.multi0_2.l)
## MCI CTL
## (Intercept) 4.169156e+03 3.369842e+04
## logAvgThickness 1.401196e+05 5.164664e+06
## Age 8.272627e-01 7.559507e-01
## ESC 1.219876e+00 1.568542e+00
# Predictions: class labels for the held-out test set.
predicted.classes.multi.nova0_2.l <- m.multi.nova0_2.l %>% predict(test.data_lobes, type = "class")
#Model accuracy
# Proportion of test rows whose predicted class matches the true diagnosis.
mean(predicted.classes.multi.nova0_2.l == test.data_lobes$Diagnostic)
## [1] 0.6078431
# Summary
# Confusion matrix plus per-class sensitivity/specificity (caret).
confusionMatrix(predicted.classes.multi.nova0_2.l, test.data_lobes$Diagnostic)
## Confusion Matrix and Statistics
##
## Reference
## Prediction AD MCI CTL
## AD 0 6 2
## MCI 2 6 4
## CTL 4 2 25
##
## Overall Statistics
##
## Accuracy : 0.6078
## 95% CI : (0.4611, 0.7416)
## No Information Rate : 0.6078
## P-Value [Acc > NIR] : 0.5609
##
## Kappa : 0.2837
##
## Mcnemar's Test P-Value : 0.3430
##
## Statistics by Class:
##
## Class: AD Class: MCI Class: CTL
## Sensitivity 0.0000 0.4286 0.8065
## Specificity 0.8222 0.8378 0.7000
## Pos Pred Value 0.0000 0.5000 0.8065
## Neg Pred Value 0.8605 0.7949 0.7000
## Prevalence 0.1176 0.2745 0.6078
## Detection Rate 0.0000 0.1176 0.4902
## Detection Prevalence 0.1569 0.2353 0.6078
## Balanced Accuracy 0.4111 0.6332 0.7532
#ROC
# Multiclass AUC (Hand & Till) on the test set, from hard class labels.
multiclass.roc(
  as.numeric(test.data_lobes$Diagnostic),
  as.numeric(predicted.classes.multi.nova0_2.l),
  percent = FALSE, # spelled out: T/F are reassignable, TRUE/FALSE are reserved
  ci.alpha = 0.9,
  stratified = FALSE,
  plot = TRUE,
  grid = TRUE,
  legacy.axes = TRUE,
  reuse.auc = TRUE,
  print.auc = TRUE,
  print.thres.col = "blue",
  ci.type = "bars",
  print.thres.cex = 0.7,
  main = "ROC curve",
  ylab = "Sensitivity (true positive rate)",
  xlab = "1-Specificity (false positive rate)"
)
##
## Call:
## multiclass.roc.default(response = as.numeric(test.data_lobes$Diagnostic), predictor = as.numeric(predicted.classes.multi.nova0_2.l), percent = F, ci.alpha = 0.9, stratified = FALSE, plot = TRUE, grid = TRUE, legacy.axes = TRUE, reuse.auc = TRUE, print.auc = TRUE, print.thres.col = "blue", ci.type = "bars", print.thres.cex = 0.7, main = "ROC curve", ylab = "Sensitivity (true positive rate)", xlab = "1-Specificity (false positive rate)")
##
## Data: as.numeric(predicted.classes.multi.nova0_2.l) with 3 levels of as.numeric(test.data_lobes$Diagnostic): 1, 2, 3.
## Multi-class area under the curve: 0.746